/* Restore CR4 and EFER from cached values. */
write_cr4(read_cr4());
- write_efer(read_efer());
+ if ( cpu_has_efer )
+ write_efer(read_efer());
device_power_up();
mov $0x80000001,%eax
cpuid
1: mov %edx,sym_phys(cpuid_ext_features)
+ mov %edx,sym_phys(boot_cpu_data)+CPUINFO_ext_features
#if defined(__x86_64__)
/* Check for availability of long mode. */
set_current((struct vcpu *)0xfffff000); /* debug sanity */
idle_vcpu[0] = current;
set_processor_id(0); /* needed early, for smp_processor_id() */
- rdmsrl(MSR_EFER, this_cpu(efer));
+ if ( cpu_has_efer )
+ rdmsrl(MSR_EFER, this_cpu(efer));
asm volatile ( "mov %%cr4,%0" : "=r" (this_cpu(cr4)) );
smp_prepare_boot_cpu();
set_processor_id(cpu);
set_current(idle_vcpu[cpu]);
this_cpu(curr_vcpu) = idle_vcpu[cpu];
- rdmsrl(MSR_EFER, this_cpu(efer));
+ if ( cpu_has_efer )
+ rdmsrl(MSR_EFER, this_cpu(efer));
asm volatile ( "mov %%cr4,%0" : "=r" (this_cpu(cr4)) );
percpu_traps_init();
BLANK();
DEFINE(IRQSTAT_shift, LOG_2(sizeof(irq_cpustat_t)));
+ BLANK();
+
+ OFFSET(CPUINFO_ext_features, struct cpuinfo_x86, x86_capability[1]);
}
#endif
DEFINE(IRQSTAT_shift, LOG_2(sizeof(irq_cpustat_t)));
+ BLANK();
+
+ OFFSET(CPUINFO_ext_features, struct cpuinfo_x86, x86_capability[1]);
}
#define cpu_has_centaur_mcr boot_cpu_has(X86_FEATURE_CENTAUR_MCR)
#define cpu_has_clflush boot_cpu_has(X86_FEATURE_CLFLSH)
#define cpu_has_page1gb 0
/*
 * 32-bit builds can run on CPUs that predate EFER.  Rather than probing the
 * MSR directly, infer its presence from the CPUID 0x80000001 EDX leaf
 * (cached in x86_capability[1]): EFER exists iff any EFER-controlled feature
 * is advertised there.  Mask 0x20100800 = bit 11 (SYSCALL/SYSRET, EFER.SCE)
 * | bit 20 (NX, EFER.NXE) | bit 29 (LM, EFER.LME/LMA).
 * NOTE(review): mask matches those three bits exactly — confirm against the
 * X86_FEATURE_* definitions for capability word 1.
 */
+#define cpu_has_efer (boot_cpu_data.x86_capability[1] & 0x20100800)
#else /* __x86_64__ */
#define cpu_has_vme 0
#define cpu_has_de 1
#define cpu_has_centaur_mcr 0
#define cpu_has_clflush boot_cpu_has(X86_FEATURE_CLFLSH)
#define cpu_has_page1gb boot_cpu_has(X86_FEATURE_PAGE1GB)
/* Long mode requires EFER (EFER.LME), so it is unconditionally present. */
+#define cpu_has_efer 1
#endif
#define cpu_has_ffxsr ((boot_cpu_data.x86_vendor == X86_VENDOR_AMD) \